jsmpeg学习笔记 您所在的位置:网站首页 canvas options jsmpeg学习笔记

jsmpeg学习笔记

#jsmpeg学习笔记| 来源: 网络整理| 查看: 265

一、概述

参考《HTML5 视频直播(二)》与《我想在egret里面引入第三方jsmpeg库》。1. jsmpeg 是一个 MPEG1 解码器,它是由作者从零编写出来的,并不像 Broadway 那样是从其他语言翻译而成,所以代码可读性要好很多,代码也更轻量级。jsmpeg 对视频文件编码方式有一些要求:不支持 B-Frames,视频宽度必须是 2 的倍数。readme中原文如下:JSMpeg only supports playback of MPEG-TS containers with the MPEG1 Video Codec and the MP2 Audio Codec. The Video Decoder does not handle B-Frames correctly (though no modern encoder seems to use these by default anyway) and the width of the video has to be a multiple of 2.

You can encode a suitable video using ffmpeg like this:

ffmpeg -i in.mp4 -f mpegts -codec:v mpeg1video -codec:a mp2 -b 0 out.ts

2.使用示例

// Example: open a WebSocket stream on port 8082 of the current host and
// render it into the existing <canvas id="video-canvas"> element.
var canvas = document.getElementById('video-canvas');
var url = ['ws://', document.location.hostname, ':8082/'].join('');
var player = new JSMpeg.Player(url, { canvas: canvas });

或者

3.以下参考在手机web中播放视频(使用js,不使用video标签,支持直播)jsmpeg只支持mpeg格式的视频,jsmpeg官方建议用ffmpeg来转格式。ffmpeg下载地址

把视频转成mpg格式(转出的视频无音频,且应该是支持从许多种格式转过来的,我暂时只试过从mp4转)./ffmpeg -i video.mp4 -f mpeg1video -vf "crop=iw-mod(iw\,2):ih-mod(ih\,2)" -b 0 video.mpg

从视频中提取音频(上面说过jsmpeg不能播声音,所以声音要单独弄出来)./ffmpeg -i video.mp4 -f mp3 -vn video.mp3

二、源码

需要对照readme里的参数来阅读源码The url argument accepts a URL to an MPEG .ts file or a WebSocket server (ws://...).

The options argument supports the following properties:

canvas – the HTML Canvas element to use for video rendering. If none is given, the renderer will create its own Canvas element. loop – whether to loop the video (static files only). Default true. autoplay - whether to start playing immediately (static files only). Default false. audio - whether to decode audio. Default true. video - whether to decode video. Default true. poster – URL to an image to use as the poster to show before the video plays. pauseWhenHidden – whether to pause playback when the tab is inactive. Default true. Note that browsers usually throttle JS in inactive tabs anyway. disableGl - whether to disable WebGL and always use the Canvas2D renderer. Default false. preserveDrawingBuffer – whether the WebGL context is created with preserveDrawingBuffer - necessary for "screenshots" via canvas.toDataURL(). Default false. progressive - whether to load data in chunks (static files only). When enabled, playback can begin before the whole source has been completely loaded. Default true.(边下边播) throttled - when using progressive, whether to defer loading chunks when they're not needed for playback yet. Default true. chunkSize - when using progressive, the chunk size in bytes to load at a time. Default 1024*1024 (1mb). decodeFirstFrame - whether to decode and display the first frame of the video. Useful to set up the Canvas size and use the frame as the "poster" image. This has no effect when using autoplay or streaming sources. Default true. maxAudioLag – when streaming, the maximum enqueued audio length in seconds. videoBufferSize – when streaming, size in bytes for the video decode buffer. Default 512*1024 (512kb). You may have to increase this for very high bitrates. audioBufferSize – when streaming, size in bytes for the audio decode buffer. Default 128*1024 (128kb). You may have to increase this for very high bitrates.

All options except from canvas can also be used with the HTML Element through data- attributes. E.g. to specify looping and autoplay in JavaScript:

1.jsmpeg.js

// The Player sets up the connections between source, demuxer, decoders, // renderer and audio output. It ties everything together, is responsible // of scheduling decoding and provides some convenience methods for // external users. Player: null,

2.player.js

// The Player wires source → demuxer → decoders → renderer / audio output.
// `url` is an MPEG-TS file URL or a ws(s):// address; `options` selects the
// source type and toggles video/audio decoding (see the readme option list).
var Player = function(url, options) {
	// Pick a data source: a caller-supplied source class, WebSocket for
	// ws(s):// URLs, progressive AJAX (chunked; the default for files),
	// or plain AJAX when progressive is explicitly disabled.
	if (options.source) {
		this.source = new options.source(url, options);
		options.streaming = !!this.source.streaming;
	}
	else if (url.match(/^wss?:\/\//)) {
		this.source = new JSMpeg.Source.WebSocket(url, options);
		options.streaming = true;
	}
	else if (options.progressive !== false) {
		this.source = new JSMpeg.Source.AjaxProgressive(url, options);
		options.streaming = false;
	}
	else {
		this.source = new JSMpeg.Source.Ajax(url, options);
		options.streaming = false;
	}

	// Demux the MPEG-TS container; the source feeds raw data into it.
	this.demuxer = new JSMpeg.Demuxer.TS(options);
	this.source.connect(this.demuxer);

	if (options.video !== false) {
		this.video = new JSMpeg.Decoder.MPEG1Video(options);
		// WebGL renderer when available and not disabled, else Canvas2D.
		this.renderer = !options.disableGl && JSMpeg.Renderer.WebGL.IsSupported()
			? new JSMpeg.Renderer.WebGL(options)
			: new JSMpeg.Renderer.Canvas2D(options);
		this.demuxer.connect(JSMpeg.Demuxer.TS.STREAM.VIDEO_1, this.video);
		this.video.connect(this.renderer);
	}

	if (options.audio !== false && JSMpeg.AudioOutput.WebAudio.IsSupported()) {
		this.audio = new JSMpeg.Decoder.MP2Audio(options);
		this.audioOut = new JSMpeg.AudioOutput.WebAudio(options);
		this.demuxer.connect(JSMpeg.Demuxer.TS.STREAM.AUDIO_1, this.audio);
		this.audio.connect(this.audioOut);
	}
}

this.demuxer.connect(JSMpeg.Demuxer.TS.STREAM.VIDEO_1, this.video);会把video作为destination参数存入pesPacketInfo。

在write方法中会调用 parsePacket方法(谁来调用write方法在最后讲),parsePacket方法中达到packetComplete,接着调用 pi.destination.write(pi.pts, pi.buffers);再去video中看看write方法:

// Append demuxed video data to the bit buffer. Until a sequence header has
// been seen, scan the buffered bits for one; once found, parse it and —
// when decodeFirstFrame is set — decode the first frame immediately.
MPEG1.prototype.write = function(pts, buffers) {
	JSMpeg.Decoder.Base.prototype.write.call(this, pts, buffers);

	if (this.hasSequenceHeader) {
		return;
	}
	if (this.bits.findStartCode(MPEG1.START.SEQUENCE) === -1) {
		// No sequence header in the buffered data yet.
		return false;
	}
	this.decodeSequenceHeader();
	if (this.decodeFirstFrame) {
		this.decode();
	}
};

最终调用了decode方法

// Decode one picture from the bit buffer and advance the decoded timestamp
// by one frame duration. Returns true when a frame was decoded, false when
// no sequence header has been parsed yet or no picture start code is
// buffered. Fix: the original declared `bufferedBytes` in the not-found
// branch but never used it — the dead local is removed.
MPEG1.prototype.decode = function() {
	if (!this.hasSequenceHeader) {
		return false;
	}
	// Wait until a complete picture start code is available.
	if (this.bits.findStartCode(MPEG1.START.PICTURE) === -1) {
		return false;
	}
	this.decodePicture();
	this.advanceDecodedTime(1/this.frameRate);
	return true;
};

在decodePicture方法中,有段代码 :

if (this.destination) { this.destination.render(this.currentY, this.currentCr, this.currentCb); }

这个destination就是这段代码指定的渲染器

this.renderer = !options.disableGl && JSMpeg.Renderer.WebGL.IsSupported() ? new JSMpeg.Renderer.WebGL(options) : new JSMpeg.Renderer.Canvas2D(options);

比如来看一下canvas2d.js中的render

// Convert one decoded frame's Y/Cb/Cr planes to RGBA pixels and blit the
// result onto the 2D canvas at the origin.
CanvasRenderer.prototype.render = function(luma, chromaB, chromaR) {
	var pixels = this.imageData.data;
	this.YCbCrToRGBA(luma, chromaB, chromaR, pixels);
	this.context.putImageData(this.imageData, 0, 0);
};

可以看到,putImageData会把图片一张张渲染上去

回到一开始的问题,谁来调用write方法:看一下websocket.js

// Record the downstream consumer (the TS demuxer) that received data
// is piped into.
WSSource.prototype.connect = function(destination) {
	this.destination = destination;
};

// WebSocket message handler: hand each received payload straight to the
// connected destination, silently dropping data while unconnected.
WSSource.prototype.onMessage = function(ev) {
	if (!this.destination) {
		return;
	}
	this.destination.write(ev.data);
};

那么哪里调用了connect方法呢

this.demuxer = new JSMpeg.Demuxer.TS(options); this.source.connect(this.demuxer); ... this.source.start();

最终,这个流程串连起来

3.websocket.js

// Remember the downstream consumer (the TS demuxer) for this source.
WSSource.prototype.connect = function(destination) {
	this.destination = destination;
};

// Called for every WebSocket message; forwards the raw payload to the
// destination's write() — this is what ultimately drives the demuxer.
WSSource.prototype.onMessage = function(ev) {
	if (this.destination) {
		this.destination.write(ev.data);
	}
};

4.ajax-progressive.js 或者 ajax.jschunk块状加载,边下边播。或者全部加载完播放

5.ts.jsJSMpeg.Demuxer.TS

// Register a decoder for the given PES stream id. Sets up an empty packet
// accumulator whose buffered payload is later flushed to `destination`
// via destination.write(pts, buffers) once a packet is complete.
TS.prototype.connect = function(streamId, destination) {
	var packetInfo = {
		destination: destination,
		currentLength: 0,
		totalLength: 0,
		pts: 0,
		buffers: []
	};
	this.pesPacketInfo[streamId] = packetInfo;
};

解码过程中用到了this.bits = new JSMpeg.BitBuffer(totalLength);

6.buffer.js

// Bit-level read buffer used by the decoders. The article elides the body;
// `bufferOrLength` appears to be either an existing buffer or a byte length
// to allocate, and `mode` presumably selects EVICT vs EXPAND behavior (see
// the MODE constants used in mpeg1.js) — confirm against buffer.js upstream.
var BitBuffer = function(bufferOrLength, mode) { }

7.decode.js

JSMpeg.Decoder.Base = (function(){ "use strict"; var BaseDecoder = function(options) { this.destination = null; this.canPlay = false; this.collectTimestamps = !options.streaming; this.timestamps = []; this.timestampIndex = 0; this.startTime = 0; this.decodedTime = 0; Object.defineProperty(this, 'currentTime', {get: this.getCurrentTime}); };

8.mpeg1.js

JSMpeg.Decoder.MPEG1Video = (function(){ "use strict"; // Inspired by Java MPEG-1 Video Decoder and Player by Zoltan Korandi // https://sourceforge.net/projects/javampeg1video/ var MPEG1 = function(options) { JSMpeg.Decoder.Base.call(this, options); var bufferSize = options.videoBufferSize || 512*1024; var bufferMode = options.streaming ? JSMpeg.BitBuffer.MODE.EVICT : JSMpeg.BitBuffer.MODE.EXPAND; this.bits = new JSMpeg.BitBuffer(bufferSize, bufferMode); this.customIntraQuantMatrix = new Uint8Array(64); this.customNonIntraQuantMatrix = new Uint8Array(64); this.blockData = new Int32Array(64); this.currentFrame = 0; this.decodeFirstFrame = options.decodeFirstFrame !== false; }; MPEG1.PICTURE_TYPE = { INTRA: 1, PREDICTIVE: 2, B: 3 };

这个就是I帧,P帧,B帧

9.webgl.js

JSMpeg.Renderer.WebGL = (function(){ "use strict"; var WebGLRenderer = function(options) { this.canvas = options.canvas || document.createElement('canvas'); this.width = this.canvas.width; this.height = this.canvas.height; this.enabled = true;

10.canvas2d.js

JSMpeg.Renderer.Canvas2D = (function(){ "use strict"; var CanvasRenderer = function(options) { this.canvas = options.canvas || document.createElement('canvas'); this.width = this.canvas.width; this.height = this.canvas.height; this.enabled = true; this.context = this.canvas.getContext('2d'); };

以下两个声音部分11.mp2.js

JSMpeg.Decoder.MP2Audio = (function(){ "use strict"; // Based on kjmp2 by Martin J. Fiedler // http://keyj.emphy.de/kjmp2/ var MP2 = function(options) {

12.webaudio.js

JSMpeg.AudioOutput.WebAudio = (function() { "use strict"; var WebAudioOut = function(options) { }

13.video-element.js添加一些元素

this.canvas = document.createElement('canvas'); this.canvas.width = 960; this.canvas.height = 540; addStyles(this.canvas, { display: 'block', width: '100%' }); this.container.appendChild(this.canvas);

end



【本文地址】

公司简介

联系我们

今日新闻

    推荐新闻

    专题文章
      CopyRight 2018-2019 实验室设备网 版权所有